huggingface-hub 0.35.0rc0__py3-none-any.whl → 1.0.0rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of huggingface-hub might be problematic.

Files changed (127)
  1. huggingface_hub/__init__.py +46 -45
  2. huggingface_hub/_commit_api.py +28 -28
  3. huggingface_hub/_commit_scheduler.py +11 -8
  4. huggingface_hub/_inference_endpoints.py +8 -8
  5. huggingface_hub/_jobs_api.py +176 -20
  6. huggingface_hub/_local_folder.py +1 -1
  7. huggingface_hub/_login.py +13 -39
  8. huggingface_hub/_oauth.py +10 -14
  9. huggingface_hub/_snapshot_download.py +14 -28
  10. huggingface_hub/_space_api.py +4 -4
  11. huggingface_hub/_tensorboard_logger.py +13 -14
  12. huggingface_hub/_upload_large_folder.py +120 -13
  13. huggingface_hub/_webhooks_payload.py +3 -3
  14. huggingface_hub/_webhooks_server.py +2 -2
  15. huggingface_hub/cli/_cli_utils.py +2 -2
  16. huggingface_hub/cli/auth.py +8 -6
  17. huggingface_hub/cli/cache.py +18 -20
  18. huggingface_hub/cli/download.py +4 -4
  19. huggingface_hub/cli/hf.py +2 -5
  20. huggingface_hub/cli/jobs.py +599 -22
  21. huggingface_hub/cli/lfs.py +4 -4
  22. huggingface_hub/cli/repo.py +11 -7
  23. huggingface_hub/cli/repo_files.py +2 -2
  24. huggingface_hub/cli/upload.py +4 -4
  25. huggingface_hub/cli/upload_large_folder.py +3 -3
  26. huggingface_hub/commands/_cli_utils.py +2 -2
  27. huggingface_hub/commands/delete_cache.py +13 -13
  28. huggingface_hub/commands/download.py +4 -13
  29. huggingface_hub/commands/lfs.py +4 -4
  30. huggingface_hub/commands/repo_files.py +2 -2
  31. huggingface_hub/commands/scan_cache.py +1 -1
  32. huggingface_hub/commands/tag.py +1 -3
  33. huggingface_hub/commands/upload.py +4 -4
  34. huggingface_hub/commands/upload_large_folder.py +3 -3
  35. huggingface_hub/commands/user.py +4 -5
  36. huggingface_hub/community.py +5 -5
  37. huggingface_hub/constants.py +3 -41
  38. huggingface_hub/dataclasses.py +16 -19
  39. huggingface_hub/errors.py +42 -29
  40. huggingface_hub/fastai_utils.py +8 -9
  41. huggingface_hub/file_download.py +162 -259
  42. huggingface_hub/hf_api.py +841 -616
  43. huggingface_hub/hf_file_system.py +98 -62
  44. huggingface_hub/hub_mixin.py +37 -57
  45. huggingface_hub/inference/_client.py +257 -325
  46. huggingface_hub/inference/_common.py +110 -124
  47. huggingface_hub/inference/_generated/_async_client.py +307 -432
  48. huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +3 -3
  49. huggingface_hub/inference/_generated/types/base.py +10 -7
  50. huggingface_hub/inference/_generated/types/chat_completion.py +18 -16
  51. huggingface_hub/inference/_generated/types/depth_estimation.py +2 -2
  52. huggingface_hub/inference/_generated/types/document_question_answering.py +2 -2
  53. huggingface_hub/inference/_generated/types/feature_extraction.py +2 -2
  54. huggingface_hub/inference/_generated/types/fill_mask.py +2 -2
  55. huggingface_hub/inference/_generated/types/sentence_similarity.py +3 -3
  56. huggingface_hub/inference/_generated/types/summarization.py +2 -2
  57. huggingface_hub/inference/_generated/types/table_question_answering.py +4 -4
  58. huggingface_hub/inference/_generated/types/text2text_generation.py +2 -2
  59. huggingface_hub/inference/_generated/types/text_generation.py +10 -10
  60. huggingface_hub/inference/_generated/types/text_to_video.py +2 -2
  61. huggingface_hub/inference/_generated/types/token_classification.py +2 -2
  62. huggingface_hub/inference/_generated/types/translation.py +2 -2
  63. huggingface_hub/inference/_generated/types/zero_shot_classification.py +2 -2
  64. huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +2 -2
  65. huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +1 -3
  66. huggingface_hub/inference/_mcp/_cli_hacks.py +3 -3
  67. huggingface_hub/inference/_mcp/agent.py +3 -3
  68. huggingface_hub/inference/_mcp/cli.py +1 -1
  69. huggingface_hub/inference/_mcp/constants.py +2 -3
  70. huggingface_hub/inference/_mcp/mcp_client.py +58 -30
  71. huggingface_hub/inference/_mcp/types.py +10 -7
  72. huggingface_hub/inference/_mcp/utils.py +11 -7
  73. huggingface_hub/inference/_providers/__init__.py +4 -2
  74. huggingface_hub/inference/_providers/_common.py +49 -25
  75. huggingface_hub/inference/_providers/black_forest_labs.py +6 -6
  76. huggingface_hub/inference/_providers/cohere.py +3 -3
  77. huggingface_hub/inference/_providers/fal_ai.py +52 -21
  78. huggingface_hub/inference/_providers/featherless_ai.py +4 -4
  79. huggingface_hub/inference/_providers/fireworks_ai.py +3 -3
  80. huggingface_hub/inference/_providers/hf_inference.py +28 -20
  81. huggingface_hub/inference/_providers/hyperbolic.py +4 -4
  82. huggingface_hub/inference/_providers/nebius.py +10 -10
  83. huggingface_hub/inference/_providers/novita.py +5 -5
  84. huggingface_hub/inference/_providers/nscale.py +4 -4
  85. huggingface_hub/inference/_providers/replicate.py +15 -15
  86. huggingface_hub/inference/_providers/sambanova.py +6 -6
  87. huggingface_hub/inference/_providers/together.py +7 -7
  88. huggingface_hub/lfs.py +20 -31
  89. huggingface_hub/repocard.py +18 -18
  90. huggingface_hub/repocard_data.py +56 -56
  91. huggingface_hub/serialization/__init__.py +0 -1
  92. huggingface_hub/serialization/_base.py +9 -9
  93. huggingface_hub/serialization/_dduf.py +7 -7
  94. huggingface_hub/serialization/_torch.py +28 -28
  95. huggingface_hub/utils/__init__.py +10 -4
  96. huggingface_hub/utils/_auth.py +5 -5
  97. huggingface_hub/utils/_cache_manager.py +31 -31
  98. huggingface_hub/utils/_deprecation.py +1 -1
  99. huggingface_hub/utils/_dotenv.py +25 -21
  100. huggingface_hub/utils/_fixes.py +0 -10
  101. huggingface_hub/utils/_git_credential.py +4 -4
  102. huggingface_hub/utils/_headers.py +7 -29
  103. huggingface_hub/utils/_http.py +366 -208
  104. huggingface_hub/utils/_pagination.py +4 -4
  105. huggingface_hub/utils/_paths.py +5 -5
  106. huggingface_hub/utils/_runtime.py +16 -13
  107. huggingface_hub/utils/_safetensors.py +21 -21
  108. huggingface_hub/utils/_subprocess.py +9 -9
  109. huggingface_hub/utils/_telemetry.py +3 -3
  110. huggingface_hub/utils/_typing.py +25 -5
  111. huggingface_hub/utils/_validators.py +53 -72
  112. huggingface_hub/utils/_xet.py +16 -16
  113. huggingface_hub/utils/_xet_progress_reporting.py +32 -11
  114. huggingface_hub/utils/insecure_hashlib.py +3 -9
  115. huggingface_hub/utils/tqdm.py +3 -3
  116. {huggingface_hub-0.35.0rc0.dist-info → huggingface_hub-1.0.0rc0.dist-info}/METADATA +18 -29
  117. huggingface_hub-1.0.0rc0.dist-info/RECORD +161 -0
  118. huggingface_hub/inference_api.py +0 -217
  119. huggingface_hub/keras_mixin.py +0 -500
  120. huggingface_hub/repository.py +0 -1477
  121. huggingface_hub/serialization/_tensorflow.py +0 -95
  122. huggingface_hub/utils/_hf_folder.py +0 -68
  123. huggingface_hub-0.35.0rc0.dist-info/RECORD +0 -166
  124. {huggingface_hub-0.35.0rc0.dist-info → huggingface_hub-1.0.0rc0.dist-info}/LICENSE +0 -0
  125. {huggingface_hub-0.35.0rc0.dist-info → huggingface_hub-1.0.0rc0.dist-info}/WHEEL +0 -0
  126. {huggingface_hub-0.35.0rc0.dist-info → huggingface_hub-1.0.0rc0.dist-info}/entry_points.txt +0 -0
  127. {huggingface_hub-0.35.0rc0.dist-info → huggingface_hub-1.0.0rc0.dist-info}/top_level.txt +0 -0
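The deleted modules listed above (inference_api.py, keras_mixin.py, repository.py, serialization/_tensorflow.py, utils/_hf_folder.py) drop the legacy InferenceApi, Keras, git-based Repository, TensorFlow-serialization, and HfFolder APIs from the package. As a hedged sketch only (the replacement keywords are assumptions drawn from the export lists in the diff below, not verified signatures), code relying on the removed top-level exports would migrate roughly like this:

# Removed in 1.0.0rc0 -- these imports no longer resolve:
# from huggingface_hub import Repository, InferenceApi, KerasModelHubMixin, HfFolder

from huggingface_hub import get_token, update_repo_settings

token = get_token()  # replaces the removed HfFolder token helpers

# update_repo_visibility() is gone from the export list; visibility is assumed to be
# handled by update_repo_settings() going forward (keyword assumed, repo id hypothetical):
update_repo_settings("username/my-model", private=True)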
huggingface_hub/__init__.py

@@ -46,7 +46,7 @@ import sys
 from typing import TYPE_CHECKING


-__version__ = "0.35.0.rc0"
+__version__ = "1.0.0.rc0"

 # Alphabetical order of definitions is ensured in tests
 # WARNING: any comment added in this dictionary definition will be lost when
@@ -182,6 +182,8 @@ _SUBMOD_ATTRS = {
         "create_inference_endpoint_from_catalog",
         "create_pull_request",
         "create_repo",
+        "create_scheduled_job",
+        "create_scheduled_uv_job",
         "create_tag",
         "create_webhook",
         "dataset_info",
@@ -192,6 +194,7 @@ _SUBMOD_ATTRS = {
         "delete_folder",
         "delete_inference_endpoint",
         "delete_repo",
+        "delete_scheduled_job",
         "delete_space_secret",
         "delete_space_storage",
         "delete_space_variable",
@@ -214,11 +217,11 @@ _SUBMOD_ATTRS = {
         "get_safetensors_metadata",
         "get_space_runtime",
         "get_space_variables",
-        "get_token_permission",
         "get_user_overview",
         "get_webhook",
         "grant_access",
         "inspect_job",
+        "inspect_scheduled_job",
         "list_accepted_access_requests",
         "list_collections",
         "list_datasets",
@@ -259,6 +262,7 @@ _SUBMOD_ATTRS = {
         "request_space_storage",
         "restart_space",
         "resume_inference_endpoint",
+        "resume_scheduled_job",
         "revision_exists",
         "run_as_future",
         "run_job",
@@ -267,12 +271,12 @@ _SUBMOD_ATTRS = {
         "set_space_sleep_time",
         "space_info",
         "super_squash_history",
+        "suspend_scheduled_job",
         "unlike",
         "update_collection_item",
         "update_collection_metadata",
         "update_inference_endpoint",
         "update_repo_settings",
-        "update_repo_visibility",
         "update_webhook",
         "upload_file",
         "upload_folder",
@@ -465,15 +469,6 @@ _SUBMOD_ATTRS = {
     "inference._mcp.mcp_client": [
         "MCPClient",
     ],
-    "inference_api": [
-        "InferenceApi",
-    ],
-    "keras_mixin": [
-        "KerasModelHubMixin",
-        "from_pretrained_keras",
-        "push_to_hub_keras",
-        "save_pretrained_keras",
-    ],
     "repocard": [
         "DatasetCard",
         "ModelCard",
@@ -491,12 +486,8 @@ _SUBMOD_ATTRS = {
         "ModelCardData",
         "SpaceCardData",
     ],
-    "repository": [
-        "Repository",
-    ],
     "serialization": [
         "StateDictSplit",
-        "get_tf_storage_size",
         "get_torch_storage_id",
         "get_torch_storage_size",
         "load_state_dict_from_file",
@@ -504,7 +495,6 @@ _SUBMOD_ATTRS = {
         "save_torch_model",
         "save_torch_state_dict",
         "split_state_dict_into_shards_factory",
-        "split_tf_state_dict_into_shards",
         "split_torch_state_dict_into_shards",
     ],
     "serialization._dduf": [
@@ -514,6 +504,8 @@ _SUBMOD_ATTRS = {
         "read_dduf_file",
     ],
     "utils": [
+        "ASYNC_CLIENT_FACTORY_T",
+        "CLIENT_FACTORY_T",
         "CacheNotFound",
         "CachedFileInfo",
         "CachedRepoInfo",
@@ -521,14 +513,19 @@ _SUBMOD_ATTRS = {
         "CorruptedCacheException",
         "DeleteCacheStrategy",
         "HFCacheInfo",
-        "HfFolder",
+        "HfHubAsyncTransport",
+        "HfHubTransport",
         "cached_assets_path",
-        "configure_http_backend",
+        "close_client",
         "dump_environment_info",
+        "get_async_session",
         "get_session",
         "get_token",
+        "hf_raise_for_status",
         "logging",
         "scan_cache_dir",
+        "set_async_client_factory",
+        "set_client_factory",
     ],
 }

@@ -544,6 +541,7 @@ _SUBMOD_ATTRS = {
 # ```

 __all__ = [
+    "ASYNC_CLIENT_FACTORY_T",
     "Agent",
     "AsyncInferenceClient",
     "AudioClassificationInput",
@@ -558,6 +556,7 @@ __all__ = [
     "AutomaticSpeechRecognitionOutput",
     "AutomaticSpeechRecognitionOutputChunk",
     "AutomaticSpeechRecognitionParameters",
+    "CLIENT_FACTORY_T",
     "CONFIG_NAME",
     "CacheNotFound",
     "CachedFileInfo",
@@ -646,7 +645,8 @@ __all__ = [
     "HfFileSystemFile",
     "HfFileSystemResolvedPath",
     "HfFileSystemStreamFile",
-    "HfFolder",
+    "HfHubAsyncTransport",
+    "HfHubTransport",
     "ImageClassificationInput",
     "ImageClassificationOutputElement",
     "ImageClassificationOutputTransform",
@@ -668,7 +668,6 @@ __all__ = [
     "ImageToVideoOutput",
     "ImageToVideoParameters",
     "ImageToVideoTargetSize",
-    "InferenceApi",
     "InferenceClient",
     "InferenceEndpoint",
     "InferenceEndpointError",
@@ -680,7 +679,6 @@ __all__ = [
     "JobOwner",
     "JobStage",
     "JobStatus",
-    "KerasModelHubMixin",
     "MCPClient",
     "ModelCard",
     "ModelCardData",
@@ -705,7 +703,6 @@ __all__ = [
     "REPO_TYPE_SPACE",
     "RepoCard",
     "RepoUrl",
-    "Repository",
     "SentenceSimilarityInput",
     "SentenceSimilarityInputData",
     "SpaceCard",
@@ -818,8 +815,8 @@ __all__ = [
     "cancel_access_request",
     "cancel_job",
     "change_discussion_status",
+    "close_client",
     "comment_discussion",
-    "configure_http_backend",
     "create_branch",
     "create_collection",
     "create_commit",
@@ -828,6 +825,8 @@ __all__ = [
     "create_inference_endpoint_from_catalog",
     "create_pull_request",
     "create_repo",
+    "create_scheduled_job",
+    "create_scheduled_uv_job",
     "create_tag",
     "create_webhook",
     "dataset_info",
@@ -838,6 +837,7 @@ __all__ = [
     "delete_folder",
     "delete_inference_endpoint",
     "delete_repo",
+    "delete_scheduled_job",
     "delete_space_secret",
     "delete_space_storage",
     "delete_space_variable",
@@ -853,7 +853,7 @@ __all__ = [
     "fetch_job_logs",
     "file_exists",
     "from_pretrained_fastai",
-    "from_pretrained_keras",
+    "get_async_session",
     "get_collection",
     "get_dataset_tags",
     "get_discussion_details",
@@ -867,9 +867,7 @@ __all__ = [
     "get_session",
     "get_space_runtime",
     "get_space_variables",
-    "get_tf_storage_size",
     "get_token",
-    "get_token_permission",
     "get_torch_storage_id",
     "get_torch_storage_size",
     "get_user_overview",
@@ -877,7 +875,9 @@ __all__ = [
     "grant_access",
     "hf_hub_download",
     "hf_hub_url",
+    "hf_raise_for_status",
     "inspect_job",
+    "inspect_scheduled_job",
     "interpreter_login",
     "list_accepted_access_requests",
     "list_collections",
@@ -922,7 +922,6 @@ __all__ = [
     "permanently_delete_lfs_files",
     "preupload_lfs_files",
     "push_to_hub_fastai",
-    "push_to_hub_keras",
     "read_dduf_file",
     "reject_access_request",
     "rename_discussion",
@@ -933,29 +932,30 @@ __all__ = [
     "request_space_storage",
     "restart_space",
     "resume_inference_endpoint",
+    "resume_scheduled_job",
     "revision_exists",
     "run_as_future",
     "run_job",
     "run_uv_job",
-    "save_pretrained_keras",
     "save_torch_model",
     "save_torch_state_dict",
     "scale_to_zero_inference_endpoint",
     "scan_cache_dir",
+    "set_async_client_factory",
+    "set_client_factory",
     "set_space_sleep_time",
     "snapshot_download",
     "space_info",
     "split_state_dict_into_shards_factory",
-    "split_tf_state_dict_into_shards",
    "split_torch_state_dict_into_shards",
     "super_squash_history",
+    "suspend_scheduled_job",
     "try_to_load_from_cache",
     "unlike",
     "update_collection_item",
     "update_collection_metadata",
     "update_inference_endpoint",
     "update_repo_settings",
-    "update_repo_visibility",
     "update_webhook",
     "upload_file",
     "upload_folder",
@@ -1190,6 +1190,8 @@ if TYPE_CHECKING: # pragma: no cover
         create_inference_endpoint_from_catalog,  # noqa: F401
         create_pull_request,  # noqa: F401
         create_repo,  # noqa: F401
+        create_scheduled_job,  # noqa: F401
+        create_scheduled_uv_job,  # noqa: F401
         create_tag,  # noqa: F401
         create_webhook,  # noqa: F401
         dataset_info,  # noqa: F401
@@ -1200,6 +1202,7 @@ if TYPE_CHECKING: # pragma: no cover
         delete_folder,  # noqa: F401
         delete_inference_endpoint,  # noqa: F401
         delete_repo,  # noqa: F401
+        delete_scheduled_job,  # noqa: F401
         delete_space_secret,  # noqa: F401
         delete_space_storage,  # noqa: F401
         delete_space_variable,  # noqa: F401
@@ -1222,11 +1225,11 @@ if TYPE_CHECKING: # pragma: no cover
         get_safetensors_metadata,  # noqa: F401
         get_space_runtime,  # noqa: F401
         get_space_variables,  # noqa: F401
-        get_token_permission,  # noqa: F401
         get_user_overview,  # noqa: F401
         get_webhook,  # noqa: F401
         grant_access,  # noqa: F401
         inspect_job,  # noqa: F401
+        inspect_scheduled_job,  # noqa: F401
         list_accepted_access_requests,  # noqa: F401
         list_collections,  # noqa: F401
         list_datasets,  # noqa: F401
@@ -1267,6 +1270,7 @@ if TYPE_CHECKING: # pragma: no cover
         request_space_storage,  # noqa: F401
         restart_space,  # noqa: F401
         resume_inference_endpoint,  # noqa: F401
+        resume_scheduled_job,  # noqa: F401
         revision_exists,  # noqa: F401
         run_as_future,  # noqa: F401
         run_job,  # noqa: F401
@@ -1275,12 +1279,12 @@ if TYPE_CHECKING: # pragma: no cover
         set_space_sleep_time,  # noqa: F401
         space_info,  # noqa: F401
         super_squash_history,  # noqa: F401
+        suspend_scheduled_job,  # noqa: F401
         unlike,  # noqa: F401
         update_collection_item,  # noqa: F401
         update_collection_metadata,  # noqa: F401
         update_inference_endpoint,  # noqa: F401
         update_repo_settings,  # noqa: F401
-        update_repo_visibility,  # noqa: F401
         update_webhook,  # noqa: F401
         upload_file,  # noqa: F401
         upload_folder,  # noqa: F401
@@ -1467,13 +1471,6 @@ if TYPE_CHECKING: # pragma: no cover
     )
     from .inference._mcp.agent import Agent  # noqa: F401
     from .inference._mcp.mcp_client import MCPClient  # noqa: F401
-    from .inference_api import InferenceApi  # noqa: F401
-    from .keras_mixin import (
-        KerasModelHubMixin,  # noqa: F401
-        from_pretrained_keras,  # noqa: F401
-        push_to_hub_keras,  # noqa: F401
-        save_pretrained_keras,  # noqa: F401
-    )
     from .repocard import (
         DatasetCard,  # noqa: F401
         ModelCard,  # noqa: F401
@@ -1491,10 +1488,8 @@ if TYPE_CHECKING: # pragma: no cover
         ModelCardData,  # noqa: F401
         SpaceCardData,  # noqa: F401
     )
-    from .repository import Repository  # noqa: F401
     from .serialization import (
         StateDictSplit,  # noqa: F401
-        get_tf_storage_size,  # noqa: F401
         get_torch_storage_id,  # noqa: F401
         get_torch_storage_size,  # noqa: F401
         load_state_dict_from_file,  # noqa: F401
@@ -1502,7 +1497,6 @@ if TYPE_CHECKING: # pragma: no cover
         save_torch_model,  # noqa: F401
         save_torch_state_dict,  # noqa: F401
         split_state_dict_into_shards_factory,  # noqa: F401
-        split_tf_state_dict_into_shards,  # noqa: F401
         split_torch_state_dict_into_shards,  # noqa: F401
     )
     from .serialization._dduf import (
@@ -1512,6 +1506,8 @@ if TYPE_CHECKING: # pragma: no cover
         read_dduf_file,  # noqa: F401
     )
     from .utils import (
+        ASYNC_CLIENT_FACTORY_T,  # noqa: F401
+        CLIENT_FACTORY_T,  # noqa: F401
         CachedFileInfo,  # noqa: F401
         CachedRepoInfo,  # noqa: F401
         CachedRevisionInfo,  # noqa: F401
@@ -1519,12 +1515,17 @@ if TYPE_CHECKING: # pragma: no cover
         CorruptedCacheException,  # noqa: F401
         DeleteCacheStrategy,  # noqa: F401
         HFCacheInfo,  # noqa: F401
-        HfFolder,  # noqa: F401
+        HfHubAsyncTransport,  # noqa: F401
+        HfHubTransport,  # noqa: F401
         cached_assets_path,  # noqa: F401
-        configure_http_backend,  # noqa: F401
+        close_client,  # noqa: F401
         dump_environment_info,  # noqa: F401
+        get_async_session,  # noqa: F401
         get_session,  # noqa: F401
         get_token,  # noqa: F401
+        hf_raise_for_status,  # noqa: F401
         logging,  # noqa: F401
         scan_cache_dir,  # noqa: F401
+        set_async_client_factory,  # noqa: F401
+        set_client_factory,  # noqa: F401
     )
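The utils export changes above swap the requests-era configure_http_backend/HfFolder helpers for httpx-style client management (HfHubTransport, HfHubAsyncTransport, get_async_session, set_client_factory, close_client). A hedged sketch of how the new hooks might be used; the names come from the export list above, but the signatures are assumptions, not verified API:

import httpx

from huggingface_hub.utils import close_client, get_session, set_client_factory

# Assumed: the factory is a zero-argument callable returning the client instance that
# huggingface_hub should reuse for Hub calls (the 0.x analogue was
# configure_http_backend(backend_factory=...) returning a requests.Session).
set_client_factory(lambda: httpx.Client(timeout=30.0))

session = get_session()  # shared client managed by the library
# ... use the high-level huggingface_hub API as usual ...
close_client()           # assumed helper to release pooled connections explicitly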
huggingface_hub/_commit_api.py

@@ -11,7 +11,7 @@ from contextlib import contextmanager
 from dataclasses import dataclass, field
 from itertools import groupby
 from pathlib import Path, PurePosixPath
-from typing import TYPE_CHECKING, Any, BinaryIO, Dict, Iterable, Iterator, List, Literal, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, BinaryIO, Iterable, Iterator, Literal, Optional, Union

 from tqdm.contrib.concurrent import thread_map

@@ -235,7 +235,7 @@ class CommitOperationAdd:
         config.json: 100%|█████████████████████████| 8.19k/8.19k [00:02<00:00, 3.72kB/s]

         >>> with operation.as_file(with_tqdm=True) as file:
-        ...     requests.put(..., data=file)
+        ...     httpx.put(..., data=file)
         config.json: 100%|█████████████████████████| 8.19k/8.19k [00:02<00:00, 3.72kB/s]
         ```
     """
@@ -306,7 +306,7 @@ def _validate_path_in_repo(path_in_repo: str) -> str:
 CommitOperation = Union[CommitOperationAdd, CommitOperationCopy, CommitOperationDelete]


-def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
+def _warn_on_overwriting_operations(operations: list[CommitOperation]) -> None:
     """
     Warn user when a list of operations is expected to overwrite itself in a single
     commit.
@@ -321,7 +321,7 @@ def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
     delete before upload) but can happen if a user deletes an entire folder and then
     add new files to it.
     """
-    nb_additions_per_path: Dict[str, int] = defaultdict(int)
+    nb_additions_per_path: dict[str, int] = defaultdict(int)
     for operation in operations:
         path_in_repo = operation.path_in_repo
         if isinstance(operation, CommitOperationAdd):
@@ -355,10 +355,10 @@ def _warn_on_overwriting_operations(operations: List[CommitOperation]) -> None:
 @validate_hf_hub_args
 def _upload_lfs_files(
     *,
-    additions: List[CommitOperationAdd],
+    additions: list[CommitOperationAdd],
     repo_type: str,
     repo_id: str,
-    headers: Dict[str, str],
+    headers: dict[str, str],
     endpoint: Optional[str] = None,
     num_threads: int = 5,
     revision: Optional[str] = None,
@@ -377,7 +377,7 @@ def _upload_lfs_files(
         repo_id (`str`):
             A namespace (user or an organization) and a repo name separated
             by a `/`.
-        headers (`Dict[str, str]`):
+        headers (`dict[str, str]`):
             Headers to use for the request, including authorization headers and user agent.
         num_threads (`int`, *optional*):
             The number of concurrent threads to use when uploading. Defaults to 5.
@@ -389,13 +389,13 @@ def _upload_lfs_files(
             If an upload failed for any reason
         [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             If the server returns malformed responses
-        [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+        [`HfHubHTTPError`]
             If the LFS batch endpoint returned an HTTP error.
     """
     # Step 1: retrieve upload instructions from the LFS batch endpoint.
     # Upload instructions are retrieved by chunk of 256 files to avoid reaching
     # the payload limit.
-    batch_actions: List[Dict] = []
+    batch_actions: list[dict] = []
     for chunk in chunk_iterable(additions, chunk_size=UPLOAD_BATCH_MAX_NUM_FILES):
         batch_actions_chunk, batch_errors_chunk = post_lfs_batch_info(
             upload_infos=[op.upload_info for op in chunk],
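The comment in the hunk above describes batching additions into chunks of UPLOAD_BATCH_MAX_NUM_FILES (256) before calling the LFS batch endpoint. For illustration, a generic chunking helper equivalent in spirit to the library's internal chunk_iterable; this is a sketch, not the actual implementation in huggingface_hub.utils:

from itertools import islice
from typing import Iterable, Iterator, TypeVar

T = TypeVar("T")


def chunked(iterable: Iterable[T], chunk_size: int) -> Iterator[list[T]]:
    """Yield successive lists of at most `chunk_size` items from `iterable`."""
    iterator = iter(iterable)
    while chunk := list(islice(iterator, chunk_size)):
        yield chunk


# Each chunk of 256 additions then becomes a single POST to the LFS batch endpoint,
# keeping every request under the server's payload limit.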
@@ -466,10 +466,10 @@ def _upload_lfs_files(
 @validate_hf_hub_args
 def _upload_xet_files(
     *,
-    additions: List[CommitOperationAdd],
+    additions: list[CommitOperationAdd],
     repo_type: str,
     repo_id: str,
-    headers: Dict[str, str],
+    headers: dict[str, str],
     endpoint: Optional[str] = None,
     revision: Optional[str] = None,
     create_pr: Optional[bool] = None,
@@ -486,7 +486,7 @@ def _upload_xet_files(
         repo_id (`str`):
             A namespace (user or an organization) and a repo name separated
             by a `/`.
-        headers (`Dict[str, str]`):
+        headers (`dict[str, str]`):
             Headers to use for the request, including authorization headers and user agent.
         endpoint: (`str`, *optional*):
             The endpoint to use for the xetcas service. Defaults to `constants.ENDPOINT`.
@@ -500,7 +500,7 @@ def _upload_xet_files(
             If an upload failed for any reason.
         [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
             If the server returns malformed responses or if the user is unauthorized to upload to xet storage.
-        [`HTTPError`](https://requests.readthedocs.io/en/latest/api/#requests.HTTPError)
+        [`HfHubHTTPError`]
             If the LFS batch endpoint returned an HTTP error.

     **How it works:**
@@ -555,7 +555,7 @@ def _upload_xet_files(
     xet_endpoint = xet_connection_info.endpoint
     access_token_info = (xet_connection_info.access_token, xet_connection_info.expiration_unix_epoch)

-    def token_refresher() -> Tuple[str, int]:
+    def token_refresher() -> tuple[str, int]:
         new_xet_connection = fetch_xet_connection_info_from_repo_info(
             token_type=XetTokenType.WRITE,
             repo_id=repo_id,
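The token_refresher callback above follows a simple contract: a zero-argument callable returning a fresh (access_token, expiration_unix_epoch) pair so the xet client can re-authenticate when the current token expires. A minimal illustration of that shape; the fetch_write_token helper here is hypothetical:

import time


def make_token_refresher(fetch_write_token):
    """Wrap a token-fetching function into the (token, expiration_epoch) contract."""

    def token_refresher() -> tuple[str, int]:
        token, ttl_seconds = fetch_write_token()  # hypothetical: returns (token, lifetime in seconds)
        return token, int(time.time()) + ttl_seconds

    return token_refresher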
@@ -628,7 +628,7 @@ def _fetch_upload_modes(
     additions: Iterable[CommitOperationAdd],
     repo_type: str,
     repo_id: str,
-    headers: Dict[str, str],
+    headers: dict[str, str],
     revision: str,
     endpoint: Optional[str] = None,
     create_pr: bool = False,
@@ -647,7 +647,7 @@ def _fetch_upload_modes(
         repo_id (`str`):
             A namespace (user or an organization) and a repo name separated
             by a `/`.
-        headers (`Dict[str, str]`):
+        headers (`dict[str, str]`):
             Headers to use for the request, including authorization headers and user agent.
         revision (`str`):
             The git revision to upload the files to. Can be any valid git revision.
@@ -665,12 +665,12 @@ def _fetch_upload_modes(
     endpoint = endpoint if endpoint is not None else constants.ENDPOINT

     # Fetch upload mode (LFS or regular) chunk by chunk.
-    upload_modes: Dict[str, UploadMode] = {}
-    should_ignore_info: Dict[str, bool] = {}
-    oid_info: Dict[str, Optional[str]] = {}
+    upload_modes: dict[str, UploadMode] = {}
+    should_ignore_info: dict[str, bool] = {}
+    oid_info: dict[str, Optional[str]] = {}

     for chunk in chunk_iterable(additions, 256):
-        payload: Dict = {
+        payload: dict = {
             "files": [
                 {
                     "path": op.path_in_repo,
@@ -713,10 +713,10 @@ def _fetch_files_to_copy(
     copies: Iterable[CommitOperationCopy],
     repo_type: str,
     repo_id: str,
-    headers: Dict[str, str],
+    headers: dict[str, str],
     revision: str,
     endpoint: Optional[str] = None,
-) -> Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]]:
+) -> dict[tuple[str, Optional[str]], Union["RepoFile", bytes]]:
     """
     Fetch information about the files to copy.

@@ -732,12 +732,12 @@ def _fetch_files_to_copy(
         repo_id (`str`):
             A namespace (user or an organization) and a repo name separated
             by a `/`.
-        headers (`Dict[str, str]`):
+        headers (`dict[str, str]`):
             Headers to use for the request, including authorization headers and user agent.
         revision (`str`):
             The git revision to upload the files to. Can be any valid git revision.

-    Returns: `Dict[Tuple[str, Optional[str]], Union[RepoFile, bytes]]]`
+    Returns: `dict[tuple[str, Optional[str]], Union[RepoFile, bytes]]]`
         Key is the file path and revision of the file to copy.
         Value is the raw content as bytes (for regular files) or the file information as a RepoFile (for LFS files).

@@ -750,9 +750,9 @@ def _fetch_files_to_copy(
     from .hf_api import HfApi, RepoFolder

     hf_api = HfApi(endpoint=endpoint, headers=headers)
-    files_to_copy: Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]] = {}
+    files_to_copy: dict[tuple[str, Optional[str]], Union["RepoFile", bytes]] = {}
     # Store (path, revision) -> oid mapping
-    oid_info: Dict[Tuple[str, Optional[str]], Optional[str]] = {}
+    oid_info: dict[tuple[str, Optional[str]], Optional[str]] = {}
     # 1. Fetch OIDs for destination paths in batches.
     dest_paths = [op.path_in_repo for op in copies]
     for offset in range(0, len(dest_paths), FETCH_LFS_BATCH_SIZE):
@@ -812,11 +812,11 @@

 def _prepare_commit_payload(
     operations: Iterable[CommitOperation],
-    files_to_copy: Dict[Tuple[str, Optional[str]], Union["RepoFile", bytes]],
+    files_to_copy: dict[tuple[str, Optional[str]], Union["RepoFile", bytes]],
     commit_message: str,
     commit_description: Optional[str] = None,
     parent_commit: Optional[str] = None,
-) -> Iterable[Dict[str, Any]]:
+) -> Iterable[dict[str, Any]]:
     """
     Builds the payload to POST to the `/commit` API of the Hub.

huggingface_hub/_commit_scheduler.py

@@ -7,7 +7,7 @@ from dataclasses import dataclass
 from io import SEEK_END, SEEK_SET, BytesIO
 from pathlib import Path
 from threading import Lock, Thread
-from typing import Dict, List, Optional, Union
+from typing import Optional, Union

 from .hf_api import DEFAULT_IGNORE_PATTERNS, CommitInfo, CommitOperationAdd, HfApi
 from .utils import filter_repo_objects
@@ -53,9 +53,9 @@ class CommitScheduler:
            Whether to make the repo private. If `None` (default), the repo will be public unless the organization's default is private. This value is ignored if the repo already exists.
         token (`str`, *optional*):
             The token to use to commit to the repo. Defaults to the token saved on the machine.
-        allow_patterns (`List[str]` or `str`, *optional*):
+        allow_patterns (`list[str]` or `str`, *optional*):
             If provided, only files matching at least one pattern are uploaded.
-        ignore_patterns (`List[str]` or `str`, *optional*):
+        ignore_patterns (`list[str]` or `str`, *optional*):
             If provided, files matching any of the patterns are not uploaded.
         squash_history (`bool`, *optional*):
             Whether to squash the history of the repo after each commit. Defaults to `False`. Squashing commits is
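For context, a short usage sketch of the parameters documented above; CommitScheduler is part of the public API, and the repo id and patterns here are example values only:

from huggingface_hub import CommitScheduler

scheduler = CommitScheduler(
    repo_id="username/my-dataset",       # hypothetical repository
    repo_type="dataset",
    folder_path="logs/",                 # local folder to sync in the background
    every=10,                            # minutes between scheduled commits
    allow_patterns=["*.json", "*.csv"],  # only upload matching files
    ignore_patterns=["*.tmp"],           # never upload temporary files
)
# ... write files into logs/; they are committed on the schedule above ...
scheduler.stop()  # stop the background thread when done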
@@ -108,8 +108,8 @@ class CommitScheduler:
         revision: Optional[str] = None,
         private: Optional[bool] = None,
         token: Optional[str] = None,
-        allow_patterns: Optional[Union[List[str], str]] = None,
-        ignore_patterns: Optional[Union[List[str], str]] = None,
+        allow_patterns: Optional[Union[list[str], str]] = None,
+        ignore_patterns: Optional[Union[list[str], str]] = None,
         squash_history: bool = False,
         hf_api: Optional["HfApi"] = None,
     ) -> None:
@@ -138,7 +138,7 @@
         self.token = token

         # Keep track of already uploaded files
-        self.last_uploaded: Dict[Path, float] = {}  # key is local path, value is timestamp
+        self.last_uploaded: dict[Path, float] = {}  # key is local path, value is timestamp

         # Scheduler
         if not every > 0:
@@ -232,7 +232,7 @@
         prefix = f"{self.path_in_repo.strip('/')}/" if self.path_in_repo else ""

         # Filter with pattern + filter out unchanged files + retrieve current file size
-        files_to_upload: List[_FileToUpload] = []
+        files_to_upload: list[_FileToUpload] = []
         for relpath in filter_repo_objects(
             relpath_to_abspath.keys(), allow_patterns=self.allow_patterns, ignore_patterns=self.ignore_patterns
         ):
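The filter_repo_objects helper used above applies the same allow/ignore glob semantics as the upload APIs. A small illustration with made-up paths:

from huggingface_hub.utils import filter_repo_objects

paths = ["data/train.csv", "data/tmp/cache.bin", "README.md"]
kept = list(
    filter_repo_objects(
        paths,
        allow_patterns=["data/*", "*.md"],  # keep anything under data/ plus markdown files
        ignore_patterns=["*.bin"],          # but drop binary caches
    )
)
# kept == ["data/train.csv", "README.md"]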
@@ -315,10 +315,13 @@ class PartialFileIO(BytesIO):
         return self._size_limit

     def __getattribute__(self, name: str):
-        if name.startswith("_") or name in ("read", "tell", "seek"):  # only 3 public methods supported
+        if name.startswith("_") or name in ("read", "tell", "seek", "fileno"):  # only 4 public methods supported
             return super().__getattribute__(name)
         raise NotImplementedError(f"PartialFileIO does not support '{name}'.")

+    def fileno(self):
+        raise AttributeError("PartialFileIO does not have a fileno.")
+
     def tell(self) -> int:
         """Return the current file position."""
         return self._file.tell()
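The new fileno() override above raises AttributeError on purpose: HTTP clients commonly probe a body object for a real file descriptor (for example to size it with os.fstat), and a descriptor for the underlying file would expose the whole file rather than the slice PartialFileIO is meant to present. Raising AttributeError makes the wrapper behave like an in-memory stream, so callers fall back to read()/seek()/tell(), which respect the size limit. A hedged illustration of that probing pattern, not code from the library:

import os


def body_length(body) -> int:
    """Return the number of bytes an HTTP client would send for `body`."""
    try:
        # A real file descriptor reports the size of the *whole* underlying file.
        return os.fstat(body.fileno()).st_size
    except (AttributeError, OSError):
        # PartialFileIO lands here: measure via seek/tell, which honour its size limit.
        position = body.tell()
        body.seek(0, os.SEEK_END)
        size = body.tell()
        body.seek(position)
        return size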